Coverage for fastblocks/_workflows_integration.py: 70%
163 statements
« prev ^ index » next coverage.py v7.10.7, created at 2025-10-09 00:47 -0700
1"""ACB Workflows integration for FastBlocks.
3This module provides background job orchestration using ACB's Workflows system,
4with graceful degradation when ACB workflows are not available.
6Author: lesleslie <les@wedgwoodwebworks.com>
7Created: 2025-10-01
9Key Features:
10- Cache warming workflows (template and static file caching)
11- Template cleanup workflows (remove stale templates, optimize storage)
12- Performance optimization workflows (database query optimization, index maintenance)
13- Scheduled background tasks
14- Graceful degradation when Workflows unavailable
16Usage:
17 # Execute cache warming workflow
18 from fastblocks._workflows_integration import execute_cache_warming
19 result = await execute_cache_warming()
21 # Execute template cleanup workflow
22 from fastblocks._workflows_integration import execute_template_cleanup
23 result = await execute_template_cleanup()
25 # Execute performance optimization workflow
26 from fastblocks._workflows_integration import execute_performance_optimization
27 result = await execute_performance_optimization()
28"""
29# type: ignore # ACB workflows API stub - graceful degradation
31import typing as t
32from contextlib import suppress
33from datetime import datetime
35from acb.depends import depends
37# Try to import ACB workflows
# Fallback placeholders: replaced by the real ACB classes below when the
# optional import succeeds; otherwise they stay None so all public entry
# points degrade to the manual (non-orchestrated) fallbacks.
ACB_WORKFLOWS_AVAILABLE = False
BasicWorkflowEngine = None
WorkflowDefinition = None
WorkflowStep = None

with suppress(ImportError):
    from acb.workflows import (  # type: ignore[no-redef]
        BasicWorkflowEngine,
        WorkflowDefinition,
        WorkflowStep,
    )

    # Reached only when all three names imported cleanly.
    ACB_WORKFLOWS_AVAILABLE = True
class FastBlocksWorkflowService:
    """FastBlocks wrapper for ACB Workflows with graceful degradation."""

    # Singleton storage; only one service exists per process.
    _instance: t.ClassVar["FastBlocksWorkflowService | None"] = None

    def __new__(cls) -> "FastBlocksWorkflowService":
        """Create (or return) the single shared instance."""
        if cls._instance is None:
            cls._instance = super().__new__(cls)  # type: ignore[misc]
        return cls._instance

    def __init__(self) -> None:
        """Configure the workflow engine once, on first construction."""
        if hasattr(self, "_initialized"):
            # Singleton already configured by an earlier construction.
            return
        self._engine: t.Any = None  # BasicWorkflowEngine when ACB available
        self._initialized = True

        # Best-effort engine setup; any failure leaves the service unavailable.
        if ACB_WORKFLOWS_AVAILABLE and BasicWorkflowEngine:
            with suppress(Exception):
                self._engine = BasicWorkflowEngine(
                    max_concurrent_steps=3,  # Conservative concurrency
                    enable_retry=True,
                    max_retries=2,
                )

    @property
    def available(self) -> bool:
        """True when ACB Workflows is importable and an engine was created."""
        return ACB_WORKFLOWS_AVAILABLE and self._engine is not None
# Lazily-created singleton instance (see get_workflow_service).
_workflow_service: FastBlocksWorkflowService | None = None


def get_workflow_service() -> FastBlocksWorkflowService:
    """Return the process-wide FastBlocksWorkflowService, creating it on first use."""
    global _workflow_service
    if _workflow_service is not None:
        return _workflow_service
    _workflow_service = FastBlocksWorkflowService()
    return _workflow_service
async def execute_cache_warming(
    warm_templates: bool = True,
    warm_static: bool = True,
    warm_routes: bool = True,
) -> dict[str, t.Any]:
    """Execute cache warming workflow.

    Pre-loads frequently accessed resources into cache to improve performance.

    Args:
        warm_templates: Pre-cache commonly used templates
        warm_static: Pre-cache static file metadata
        warm_routes: Pre-cache route definitions

    Returns:
        Dictionary with workflow results
    """
    service = get_workflow_service()

    # Without ACB Workflows, run the handlers directly (graceful degradation).
    if not service.available:
        return await _manual_cache_warming(warm_templates, warm_static, warm_routes)

    def _warming_step(step_id: str, name: str, action: str) -> t.Any:
        # All warming steps share the same retry policy.
        return WorkflowStep(  # type: ignore[operator]
            step_id=step_id,
            name=name,
            action=action,
            params={},
            retry_on_failure=True,
            max_retries=2,
        )

    steps = []
    if warm_templates:
        steps.append(
            _warming_step("warm_templates", "Warm Template Cache", "warm_template_cache")
        )
    if warm_static:
        steps.append(
            _warming_step("warm_static", "Warm Static File Cache", "warm_static_cache")
        )
    if warm_routes:
        steps.append(
            _warming_step("warm_routes", "Warm Route Cache", "warm_route_cache")
        )

    workflow = WorkflowDefinition(  # type: ignore[operator]
        workflow_id="cache-warming",
        name="Cache Warming Workflow",
        description="Pre-load frequently accessed resources into cache",
        steps=steps,
        max_execution_time=300,  # 5 minutes max
    )

    result = await service._engine.execute(
        workflow,
        context={
            "warm_templates": warm_templates,
            "warm_static": warm_static,
            "warm_routes": warm_routes,
        },
        action_handlers={
            "warm_template_cache": _warm_template_cache,
            "warm_static_cache": _warm_static_cache,
            "warm_route_cache": _warm_route_cache,
        },
    )

    # Flatten the engine result into a plain dict for callers.
    step_results = list(result.step_results.values())
    state = result.state.value if hasattr(result.state, "value") else str(result.state)
    return {
        "workflow_id": workflow.workflow_id,
        "state": state,
        "completed_at": datetime.now().isoformat(),
        "steps_completed": sum(1 for s in step_results if s.state == "completed"),
        "steps_failed": sum(1 for s in step_results if s.state == "failed"),
        "errors": [s.error for s in step_results if s.error],
    }
async def execute_template_cleanup(
    remove_stale: bool = True,
    optimize_storage: bool = True,
    cleanup_cache: bool = True,
) -> dict[str, t.Any]:
    """Execute template cleanup workflow.

    Removes stale templates, optimizes storage, and cleans up cache.

    Args:
        remove_stale: Remove templates not accessed in 30+ days
        optimize_storage: Compress and optimize template storage
        cleanup_cache: Clear unused template cache entries

    Returns:
        Dictionary with workflow results
    """
    service = get_workflow_service()

    # Without ACB Workflows, run the handlers directly (graceful degradation).
    if not service.available:
        return await _manual_template_cleanup(
            remove_stale, optimize_storage, cleanup_cache
        )

    # (flag, step kwargs) table; steps chain via depends_on when the
    # upstream step is enabled.
    candidate_steps: list[tuple[bool, dict[str, t.Any]]] = [
        (
            cleanup_cache,
            {
                "step_id": "cleanup_cache",
                "name": "Cleanup Template Cache",
                "action": "cleanup_template_cache",
                "params": {},
                "retry_on_failure": False,
            },
        ),
        (
            remove_stale,
            {
                "step_id": "remove_stale",
                "name": "Remove Stale Templates",
                "action": "remove_stale_templates",
                "params": {"days_threshold": 30},
                "depends_on": ["cleanup_cache"] if cleanup_cache else [],
                "retry_on_failure": False,
            },
        ),
        (
            optimize_storage,
            {
                "step_id": "optimize_storage",
                "name": "Optimize Template Storage",
                "action": "optimize_template_storage",
                "params": {},
                "depends_on": ["remove_stale"] if remove_stale else [],
                "retry_on_failure": True,
                "max_retries": 2,
            },
        ),
    ]
    steps = [
        WorkflowStep(**kwargs)  # type: ignore[operator]
        for enabled, kwargs in candidate_steps
        if enabled
    ]

    workflow = WorkflowDefinition(  # type: ignore[operator]
        workflow_id="template-cleanup",
        name="Template Cleanup Workflow",
        description="Remove stale templates and optimize storage",
        steps=steps,
        max_execution_time=600,  # 10 minutes max
    )

    result = await service._engine.execute(
        workflow,
        context={
            "remove_stale": remove_stale,
            "optimize_storage": optimize_storage,
            "cleanup_cache": cleanup_cache,
        },
        action_handlers={
            "cleanup_template_cache": _cleanup_template_cache,
            "remove_stale_templates": _remove_stale_templates,
            "optimize_template_storage": _optimize_template_storage,
        },
    )

    # Flatten the engine result into a plain dict for callers.
    step_results = list(result.step_results.values())
    state = result.state.value if hasattr(result.state, "value") else str(result.state)
    return {
        "workflow_id": workflow.workflow_id,
        "state": state,
        "completed_at": datetime.now().isoformat(),
        "steps_completed": sum(1 for s in step_results if s.state == "completed"),
        "steps_failed": sum(1 for s in step_results if s.state == "failed"),
        "errors": [s.error for s in step_results if s.error],
    }
async def execute_performance_optimization(
    optimize_queries: bool = True,
    rebuild_indexes: bool = True,
    cleanup_sessions: bool = True,
) -> dict[str, t.Any]:
    """Execute performance optimization workflow.

    Optimizes database queries, rebuilds indexes, and cleans up sessions.

    Args:
        optimize_queries: Analyze and optimize slow queries
        rebuild_indexes: Rebuild database indexes for optimal performance
        cleanup_sessions: Clean up expired sessions

    Returns:
        Dictionary with workflow results
    """
    service = get_workflow_service()

    # Without ACB Workflows, run the handlers directly (graceful degradation).
    if not service.available:
        return await _manual_performance_optimization(
            optimize_queries, rebuild_indexes, cleanup_sessions
        )

    # (flag, step kwargs) table; index rebuild depends on query optimization
    # when the latter is enabled.
    candidate_steps: list[tuple[bool, dict[str, t.Any]]] = [
        (
            cleanup_sessions,
            {
                "step_id": "cleanup_sessions",
                "name": "Cleanup Expired Sessions",
                "action": "cleanup_expired_sessions",
                "params": {"expiry_hours": 24},
                "retry_on_failure": False,
            },
        ),
        (
            optimize_queries,
            {
                "step_id": "optimize_queries",
                "name": "Optimize Database Queries",
                "action": "optimize_database_queries",
                "params": {},
                "retry_on_failure": True,
                "max_retries": 2,
            },
        ),
        (
            rebuild_indexes,
            {
                "step_id": "rebuild_indexes",
                "name": "Rebuild Database Indexes",
                "action": "rebuild_database_indexes",
                "params": {},
                "depends_on": ["optimize_queries"] if optimize_queries else [],
                "retry_on_failure": True,
                "max_retries": 1,
            },
        ),
    ]
    steps = [
        WorkflowStep(**kwargs)  # type: ignore[operator]
        for enabled, kwargs in candidate_steps
        if enabled
    ]

    workflow = WorkflowDefinition(  # type: ignore[operator]
        workflow_id="performance-optimization",
        name="Performance Optimization Workflow",
        description="Optimize database and application performance",
        steps=steps,
        max_execution_time=900,  # 15 minutes max
    )

    result = await service._engine.execute(
        workflow,
        context={
            "optimize_queries": optimize_queries,
            "rebuild_indexes": rebuild_indexes,
            "cleanup_sessions": cleanup_sessions,
        },
        action_handlers={
            "cleanup_expired_sessions": _cleanup_expired_sessions,
            "optimize_database_queries": _optimize_database_queries,
            "rebuild_database_indexes": _rebuild_database_indexes,
        },
    )

    # Flatten the engine result into a plain dict for callers.
    step_results = list(result.step_results.values())
    state = result.state.value if hasattr(result.state, "value") else str(result.state)
    return {
        "workflow_id": workflow.workflow_id,
        "state": state,
        "completed_at": datetime.now().isoformat(),
        "steps_completed": sum(1 for s in step_results if s.state == "completed"),
        "steps_failed": sum(1 for s in step_results if s.state == "failed"),
        "errors": [s.error for s in step_results if s.error],
    }
407# Action handler implementations
async def _warm_template_cache(
    context: dict[str, t.Any], params: dict[str, t.Any]
) -> dict[str, t.Any]:
    """Warm template cache by pre-loading commonly used templates."""
    # Best-effort: any failure (gather action missing, cache adapter
    # unavailable, etc.) reports the step as skipped rather than raising.
    with suppress(Exception):
        from .actions.gather import gather

        templates_result = await gather.templates()
        if templates_result and hasattr(templates_result, "templates"):
            warmed = 0
            cache = depends.get("cache")
            if cache:
                # Cache lightweight metadata only (not fully rendered
                # templates) for the first 50 entries.
                for name in list(templates_result.templates.keys())[:50]:
                    await cache.set(
                        f"template:metadata:{name}",
                        {
                            "name": name,
                            "warmed_at": datetime.now().isoformat(),
                        },
                        ttl=3600,
                    )
                    warmed += 1
            return {"templates_warmed": warmed, "status": "completed"}

    return {"templates_warmed": 0, "status": "skipped"}
async def _warm_static_cache(
    context: dict[str, t.Any], params: dict[str, t.Any]
) -> dict[str, t.Any]:
    """Warm static file cache by pre-loading metadata."""
    # No static-file adapter integration yet, so this step always skips.
    return dict(static_files_warmed=0, status="skipped")
async def _warm_route_cache(
    context: dict[str, t.Any], params: dict[str, t.Any]
) -> dict[str, t.Any]:
    """Warm route cache by pre-loading route definitions."""
    # Best-effort: failures fall through to the skipped result.
    with suppress(Exception):
        from .actions.gather import gather

        routes_result = await gather.routes()
        if routes_result and hasattr(routes_result, "routes"):
            return {"routes_warmed": len(routes_result.routes), "status": "completed"}

    return {"routes_warmed": 0, "status": "skipped"}
async def _cleanup_template_cache(
    context: dict[str, t.Any], params: dict[str, t.Any]
) -> dict[str, t.Any]:
    """Clean up unused template cache entries."""
    # Best-effort: only cache adapters exposing pattern deletion are cleaned.
    with suppress(Exception):
        cache = depends.get("cache")
        if cache and hasattr(cache, "clear_pattern"):
            await cache.clear_pattern("template:*")
            return {"cache_cleared": True, "status": "completed"}

    return {"cache_cleared": False, "status": "skipped"}
async def _remove_stale_templates(
    context: dict[str, t.Any], params: dict[str, t.Any]
) -> dict[str, t.Any]:
    """Remove templates not accessed in X days."""
    threshold = params.get("days_threshold", 30)

    # Placeholder: a real implementation would consult template access logs
    # before deleting anything.
    return {
        "templates_removed": 0,
        "days_threshold": threshold,
        "status": "completed",
    }
async def _optimize_template_storage(
    context: dict[str, t.Any], params: dict[str, t.Any]
) -> dict[str, t.Any]:
    """Optimize template storage (compress, deduplicate)."""
    # Placeholder: compression/deduplication not implemented yet.
    return dict(storage_optimized=False, status="skipped")
async def _cleanup_expired_sessions(
    context: dict[str, t.Any], params: dict[str, t.Any]
) -> dict[str, t.Any]:
    """Clean up expired sessions."""
    hours = params.get("expiry_hours", 24)

    # Placeholder: a real implementation would purge session storage.
    return {
        "sessions_cleaned": 0,
        "expiry_hours": hours,
        "status": "completed",
    }
async def _optimize_database_queries(
    context: dict[str, t.Any], params: dict[str, t.Any]
) -> dict[str, t.Any]:
    """Analyze and optimize slow database queries."""
    # Placeholder: query-log analysis not implemented yet.
    return dict(queries_optimized=0, status="skipped")
async def _rebuild_database_indexes(
    context: dict[str, t.Any], params: dict[str, t.Any]
) -> dict[str, t.Any]:
    """Rebuild database indexes for optimal performance."""
    # Placeholder: index maintenance not implemented yet.
    return dict(indexes_rebuilt=0, status="skipped")
536# Manual fallback implementations (when ACB Workflows unavailable)
async def _manual_cache_warming(
    warm_templates: bool, warm_static: bool, warm_routes: bool
) -> dict[str, t.Any]:
    """Manual cache warming without workflow orchestration.

    Runs each enabled handler sequentially with empty context/params.
    """
    results: dict[str, t.Any] = {}
    if warm_templates:
        results["templates"] = await _warm_template_cache({}, {})
    if warm_static:
        results["static"] = await _warm_static_cache({}, {})
    if warm_routes:
        results["routes"] = await _warm_route_cache({}, {})

    return {
        "workflow_id": "cache-warming",
        "state": "completed",
        "completed_at": datetime.now().isoformat(),
        "results": results,
        "mode": "manual",
    }
async def _manual_template_cleanup(
    remove_stale: bool, optimize_storage: bool, cleanup_cache: bool
) -> dict[str, t.Any]:
    """Manual template cleanup without workflow orchestration.

    Runs each enabled handler sequentially, cache cleanup first.
    """
    results: dict[str, t.Any] = {}
    if cleanup_cache:
        results["cache_cleanup"] = await _cleanup_template_cache({}, {})
    if remove_stale:
        results["stale_removal"] = await _remove_stale_templates(
            {}, {"days_threshold": 30}
        )
    if optimize_storage:
        results["storage_optimization"] = await _optimize_template_storage({}, {})

    return {
        "workflow_id": "template-cleanup",
        "state": "completed",
        "completed_at": datetime.now().isoformat(),
        "results": results,
        "mode": "manual",
    }
async def _manual_performance_optimization(
    optimize_queries: bool, rebuild_indexes: bool, cleanup_sessions: bool
) -> dict[str, t.Any]:
    """Manual performance optimization without workflow orchestration.

    Runs each enabled handler sequentially, session cleanup first.
    """
    results: dict[str, t.Any] = {}
    if cleanup_sessions:
        results["session_cleanup"] = await _cleanup_expired_sessions(
            {}, {"expiry_hours": 24}
        )
    if optimize_queries:
        results["query_optimization"] = await _optimize_database_queries({}, {})
    if rebuild_indexes:
        results["index_rebuild"] = await _rebuild_database_indexes({}, {})

    return {
        "workflow_id": "performance-optimization",
        "state": "completed",
        "completed_at": datetime.now().isoformat(),
        "results": results,
        "mode": "manual",
    }
async def register_fastblocks_workflows() -> bool:
    """Register FastBlocks workflows with ACB.

    Returns:
        True if registration successful, False otherwise
    """
    if not ACB_WORKFLOWS_AVAILABLE:
        return False

    try:
        # Create (or reuse) the singleton service and publish it via DI.
        service = get_workflow_service()
        depends.set("fastblocks_workflows", service)
        return service.available
    except Exception:
        # Registration is optional; never propagate failures to the caller.
        return False
# Public API of this module; keep in sync with the definitions above.
__all__ = [
    "FastBlocksWorkflowService",
    "get_workflow_service",
    "execute_cache_warming",
    "execute_template_cleanup",
    "execute_performance_optimization",
    "register_fastblocks_workflows",
    "ACB_WORKFLOWS_AVAILABLE",
]