Coverage for fastblocks/actions/sync/static.py: 13%

381 statements  

« prev     ^ index     » next       coverage.py v7.10.7, created at 2025-10-09 00:47 -0700

1"""Static files synchronization between filesystem, cloud storage, and selective caching. 

2 

3Static sync uses selective caching based on file type: 

4- Text-based files (CSS, JS, MD, TXT) are cached for performance 

5- Binary files (images, fonts, media) sync to storage only to avoid cache bloat 

6""" 

7 

8import typing as t 

9from pathlib import Path 

10 

11import yaml 

12from acb.debug import debug 

13from anyio import Path as AsyncPath 

14 

15from .strategies import ( 

16 ConflictStrategy, 

17 SyncDirection, 

18 SyncResult, 

19 SyncStrategy, 

20 create_backup, 

21 get_file_info, 

22 resolve_conflict, 

23 should_sync, 

24) 

25 

26 

class StaticSyncResult(SyncResult):
    """Sync result extended with static-asset bookkeeping.

    On top of the base ``SyncResult`` fields, records which assets were
    processed, the MIME type detected per asset, which cache keys were
    touched, and the cacheable/non-cacheable split.
    """

    def __init__(
        self,
        *,
        assets_processed: list[str] | None = None,
        mime_types_detected: dict[str, str] | None = None,
        cache_invalidated: list[str] | None = None,
        cache_cleared: list[str] | None = None,
        cacheable_assets: list[str] | None = None,
        non_cacheable_assets: list[str] | None = None,
        **kwargs: t.Any,
    ) -> None:
        super().__init__(**kwargs)
        # Substitute a fresh empty container only when the caller passed
        # nothing; an explicitly provided container is kept (aliased).
        self.assets_processed = [] if assets_processed is None else assets_processed
        self.mime_types_detected = {} if mime_types_detected is None else mime_types_detected
        self.cache_invalidated = [] if cache_invalidated is None else cache_invalidated
        self.cache_cleared = [] if cache_cleared is None else cache_cleared
        self.cacheable_assets = [] if cacheable_assets is None else cacheable_assets
        self.non_cacheable_assets = [] if non_cacheable_assets is None else non_cacheable_assets

52 

53 

# Text assets worth caching: small, compressible, frequently requested.
CACHEABLE_EXTENSIONS = {".css", ".js", ".md", ".txt"}

# Binary assets (images, fonts, media, archives) sync to storage only;
# caching them would bloat the cache for little benefit.
NON_CACHEABLE_EXTENSIONS = {
    ".png", ".jpg", ".jpeg", ".gif", ".svg", ".ico", ".webp", ".avif",
    ".mp4", ".mov", ".mp3", ".wav",
    ".pdf", ".zip",
    ".woff", ".woff2", ".ttf", ".eot", ".otf",
}


def _is_cacheable_file(file_path: AsyncPath) -> bool:
    """Return True when the file's extension marks it as a cacheable text asset."""
    extension = file_path.suffix.lower()
    return extension in CACHEABLE_EXTENSIONS

80 

81 

async def sync_static(
    *,
    static_path: AsyncPath | None = None,
    file_patterns: list[str] | None = None,
    strategy: SyncStrategy | None = None,
    storage_bucket: str | None = None,
    exclude_patterns: list[str] | None = None,
) -> StaticSyncResult:
    """Synchronize static files between the filesystem and cloud storage.

    Discovers files under *static_path* matching *file_patterns* (minus
    *exclude_patterns*), then syncs each one according to *strategy*.
    Errors are collected on the returned result rather than raised.
    """
    config = _prepare_static_sync_config(
        static_path, file_patterns, strategy, exclude_patterns
    )
    result = StaticSyncResult()

    # Resolve the bucket name lazily so config I/O only happens when needed.
    if storage_bucket is None:
        storage_bucket = await _get_default_static_bucket()

    adapters = await _initialize_adapters(result)
    if not adapters:
        # Adapter failure was already recorded on result.errors.
        return result

    static_files = await _discover_static_files(
        config["static_path"],
        config["file_patterns"],
        config["exclude_patterns"],
    )
    if not static_files:
        debug("No static files found to sync")
        return result

    debug(f"Found {len(static_files)} static files to sync")

    await _sync_static_files(
        static_files,
        adapters,
        config["strategy"],
        storage_bucket,
        result,
    )

    debug(
        f"Static sync completed: {len(result.synced_items)} synced, {len(result.conflicts)} conflicts",
    )

    return result

126 

127 

def _prepare_static_sync_config(
    static_path: AsyncPath | None,
    file_patterns: list[str] | None,
    strategy: SyncStrategy | None,
    exclude_patterns: list[str] | None,
) -> dict[str, t.Any]:
    """Fill in defaults for every sync_static() option and bundle them."""
    default_patterns = [
        "*.css", "*.js",
        "*.png", "*.jpg", "*.jpeg", "*.gif", "*.svg", "*.ico",
        "*.woff", "*.woff2", "*.ttf", "*.eot", "*.otf",
        "*.webp", "*.avif",
        "*.pdf", "*.zip", "*.tar.gz",
    ]
    default_excludes = ["*.tmp", "*.log", ".*", "__pycache__"]
    return {
        "static_path": static_path or AsyncPath("static"),
        "file_patterns": file_patterns or default_patterns,
        "strategy": strategy or SyncStrategy(),
        "exclude_patterns": exclude_patterns or default_excludes,
    }

160 

161 

async def _initialize_adapters(result: StaticSyncResult) -> dict[str, t.Any] | None:
    """Resolve storage and cache adapters from the dependency container.

    On any failure, records the error on *result* and returns None so the
    caller can bail out early. The cache adapter is optional.
    """
    try:
        from acb.depends import depends

        storage = depends.get("storage")
        cache = depends.get("cache")
        if not storage:
            result.errors.append(Exception("Storage adapter not available"))
            return None
    except Exception as e:
        result.errors.append(e)
        return None
    return {"storage": storage, "cache": cache}

176 

177 

async def _get_default_static_bucket() -> str:
    """Read the static bucket name from settings/storage.yml.

    Falls back to ``"static"`` when the config file is absent, unparsable,
    or does not define a bucket.
    """
    try:
        storage_config_path = AsyncPath("settings/storage.yml")
        if await storage_config_path.exists():
            raw = await storage_config_path.read_text()
            parsed = yaml.safe_load(raw)
            bucket_name = "static"
            if isinstance(parsed, dict):
                bucket_name = t.cast(
                    str, parsed.get("buckets", {}).get("static", "static")
                )
            debug(f"Using static bucket from config: {bucket_name}")
            return bucket_name
    except Exception as e:
        debug(f"Could not load storage config, using default: {e}")
    debug("Using fallback static bucket: static")
    return "static"

196 

197 

async def _discover_static_files(
    static_path: AsyncPath,
    file_patterns: list[str],
    exclude_patterns: list[str],
) -> list[dict[str, t.Any]]:
    """Collect a metadata record for every static file matching the patterns."""
    discovered: list[dict[str, t.Any]] = []

    if not await static_path.exists():
        debug(f"Static path does not exist: {static_path}")
        return discovered

    for pattern in file_patterns:
        await _discover_files_with_pattern(
            static_path,
            pattern,
            exclude_patterns,
            discovered,
        )

    return discovered

218 

219 

async def _discover_files_with_pattern(
    static_path: AsyncPath,
    pattern: str,
    exclude_patterns: list[str],
    static_files: list[dict[str, t.Any]],
) -> None:
    """Append a record for each non-excluded regular file matching *pattern*."""
    async for candidate in static_path.rglob(pattern):
        if not await candidate.is_file():
            continue
        if _should_exclude_file(candidate, exclude_patterns):
            continue
        await _process_static_file(candidate, static_path, static_files)

236 

237 

def _should_exclude_file(file_path: AsyncPath, exclude_patterns: list[str]) -> bool:
    """Return True when the file name or full path matches any exclusion glob."""
    import fnmatch

    name = file_path.name
    path_text = str(file_path)
    return any(
        fnmatch.fnmatch(name, pattern) or fnmatch.fnmatch(path_text, pattern)
        for pattern in exclude_patterns
    )

250 

251 

async def _process_static_file(
    file_path: AsyncPath,
    static_path: AsyncPath,
    static_files: list[dict[str, t.Any]],
) -> None:
    """Build and append the sync record for one discovered file."""
    try:
        rel_path = file_path.relative_to(static_path)
    except ValueError:
        # rglob should only yield children of static_path; guard anyway.
        debug(f"Could not get relative path for {file_path}")
        return

    record = {
        "local_path": file_path,
        "relative_path": rel_path,
        "storage_path": str(rel_path),
        "mime_type": _detect_mime_type(file_path),
        "is_cacheable": _is_cacheable_file(file_path),
    }
    static_files.append(record)

273 

274 

def _detect_mime_type(file_path: AsyncPath) -> str:
    """Guess a MIME type from the filename; default to octet-stream."""
    import mimetypes

    guessed, _encoding = mimetypes.guess_type(str(file_path))
    return guessed or "application/octet-stream"

280 

281 

async def _sync_static_files(
    static_files: list[dict[str, t.Any]],
    adapters: dict[str, t.Any],
    strategy: SyncStrategy,
    storage_bucket: str,
    result: StaticSyncResult,
) -> None:
    """Sync every discovered file, folding per-file outcomes into *result*.

    A failure on one file is recorded and does not stop the remaining files.
    """
    for static_info in static_files:
        try:
            file_result = await _sync_single_static_file(
                static_info,
                adapters["storage"],
                adapters["cache"],
                strategy,
                storage_bucket,
            )
            _accumulate_static_sync_results(file_result, result)

            if not file_result.get("synced"):
                continue

            # Bookkeeping only for files that actually synced.
            path = static_info["storage_path"]
            result.assets_processed.append(path)
            result.mime_types_detected[path] = static_info["mime_type"]
            bucket = (
                result.cacheable_assets
                if static_info["is_cacheable"]
                else result.non_cacheable_assets
            )
            bucket.append(path)
        except Exception as e:
            result.errors.append(e)
            debug(f"Error syncing static file {static_info['relative_path']}: {e}")

314 

315 

def _accumulate_static_sync_results(
    file_result: dict[str, t.Any],
    result: StaticSyncResult,
) -> None:
    """Merge one file's sync outcome into the aggregate result."""
    # (file_result key, aggregate-result attribute) pairs, merged in order.
    merge_map = (
        ("synced", "synced_items"),
        ("conflicts", "conflicts"),
        ("errors", "errors"),
        ("skipped", "skipped"),
        ("backed_up", "backed_up"),
        ("cache_invalidated", "cache_invalidated"),
        ("cache_cleared", "cache_cleared"),
    )
    for key, attr in merge_map:
        entries = file_result.get(key)
        if entries:
            getattr(result, attr).extend(entries)

334 

335 

async def _sync_single_static_file(
    static_info: dict[str, t.Any],
    storage: t.Any,
    cache: t.Any,
    strategy: SyncStrategy,
    storage_bucket: str,
) -> dict[str, t.Any]:
    """Sync one static file and return its per-file outcome dict.

    Never raises: any failure is appended to ``result["errors"]`` so a
    single bad file cannot abort the whole sync run.

    Args:
        static_info: Record built by ``_process_static_file`` with
            ``local_path``, ``storage_path``, ``mime_type``, ``is_cacheable``.
        storage: Storage adapter.
        cache: Cache adapter (may be falsy; caching is optional).
        strategy: Sync direction / conflict-handling options.
        storage_bucket: Bucket attribute name on the storage adapter.

    Returns:
        A dict shaped like ``_create_sync_result()``.
    """
    local_path = static_info["local_path"]
    storage_path = static_info["storage_path"]
    mime_type = static_info["mime_type"]
    is_cacheable = static_info["is_cacheable"]

    result = _create_sync_result()

    try:
        # Gather both sides' metadata (existence, size, mtime, hash).
        local_info, remote_info = await _get_file_infos(
            local_path,
            storage,
            storage_bucket,
            storage_path,
        )

        # Skip (and record) files the strategy considers already in sync.
        if not await _should_sync_file(
            local_info,
            remote_info,
            strategy,
            storage_path,
            result,
        ):
            return result

        # Dispatch to pull / push / conflict handling as appropriate.
        await _execute_sync_operation(
            local_path,
            storage,
            cache,
            storage_bucket,
            storage_path,
            local_info,
            remote_info,
            strategy,
            mime_type,
            is_cacheable,
            result,
        )

    except Exception as e:
        result["errors"].append(e)
        debug(f"Error in _sync_single_static_file for {storage_path}: {e}")

    return result

386 

387 

388def _create_sync_result() -> dict[str, t.Any]: 

389 return { 

390 "synced": [], 

391 "conflicts": [], 

392 "errors": [], 

393 "skipped": [], 

394 "backed_up": [], 

395 "cache_invalidated": [], 

396 "cache_cleared": [], 

397 } 

398 

399 

async def _get_file_infos(
    local_path: t.Any,
    storage: t.Any,
    storage_bucket: str,
    storage_path: str,
) -> tuple[dict[str, t.Any], dict[str, t.Any]]:
    """Fetch (local, remote) metadata dicts for one static file."""
    local = await get_file_info(Path(local_path))
    remote = await _get_storage_file_info(storage, storage_bucket, storage_path)
    return local, remote

409 

410 

async def _should_sync_file(
    local_info: dict[str, t.Any],
    remote_info: dict[str, t.Any],
    strategy: SyncStrategy,
    storage_path: str,
    result: dict[str, t.Any],
) -> bool:
    """Decide whether this file needs syncing; record a skip when it doesn't."""
    sync_needed, reason = should_sync(local_info, remote_info, strategy.direction)
    if sync_needed:
        debug(f"Syncing static file {storage_path}: {reason}")
        return True
    result["skipped"].append(f"{storage_path} ({reason})")
    return False

425 

426 

async def _execute_sync_operation(
    local_path: t.Any,
    storage: t.Any,
    cache: t.Any,
    storage_bucket: str,
    storage_path: str,
    local_info: dict[str, t.Any],
    remote_info: dict[str, t.Any],
    strategy: SyncStrategy,
    mime_type: str,
    is_cacheable: bool,
    result: dict[str, t.Any],
) -> None:
    """Dispatch one file to pull, push, or conflict resolution.

    The three predicates are checked in order; a bidirectional sync where
    both sides exist and neither is strictly newer falls through to the
    conflict handler. If no branch matches, nothing happens.
    """
    # Remote wins: PULL direction, or bidirectional with newer/only remote.
    if _should_pull_static(strategy, local_info, remote_info):
        await _pull_static(
            local_path,
            storage,
            cache,
            storage_bucket,
            storage_path,
            strategy,
            is_cacheable,
            result,
        )
    # Local wins: PUSH direction, or bidirectional with newer/only local.
    elif _should_push_static(strategy, local_info, remote_info):
        await _push_static(
            local_path,
            storage,
            cache,
            storage_bucket,
            storage_path,
            strategy,
            mime_type,
            is_cacheable,
            result,
        )
    # Both sides exist under bidirectional sync: resolve per strategy.
    elif _has_bidirectional_conflict(strategy, local_info, remote_info):
        await _handle_static_conflict(
            local_path,
            storage,
            cache,
            storage_bucket,
            storage_path,
            local_info,
            remote_info,
            strategy,
            mime_type,
            is_cacheable,
            result,
        )

477 

478 

def _should_pull_static(
    strategy: SyncStrategy,
    local_info: dict[str, t.Any],
    remote_info: dict[str, t.Any],
) -> bool:
    """True when remote content should overwrite (or create) the local file."""
    if strategy.direction == SyncDirection.PULL:
        return True
    if strategy.direction != SyncDirection.BIDIRECTIONAL:
        return False
    if not remote_info["exists"]:
        return False
    # Pull when there is no local copy, or the remote copy is newer.
    return not local_info["exists"] or remote_info["mtime"] > local_info["mtime"]

489 

490 

def _should_push_static(
    strategy: SyncStrategy,
    local_info: dict[str, t.Any],
    remote_info: dict[str, t.Any],
) -> bool:
    """True when the local file should overwrite (or create) the remote copy."""
    if strategy.direction == SyncDirection.PUSH:
        return True
    if strategy.direction != SyncDirection.BIDIRECTIONAL:
        return False
    if not local_info["exists"]:
        return False
    # Push when there is no remote copy, or the local copy is newer.
    return not remote_info["exists"] or local_info["mtime"] > remote_info["mtime"]

501 

502 

def _has_bidirectional_conflict(
    strategy: SyncStrategy,
    local_info: dict[str, t.Any],
    remote_info: dict[str, t.Any],
) -> bool:
    """True when both sides exist under bidirectional sync (needs resolution)."""
    if strategy.direction != SyncDirection.BIDIRECTIONAL:
        return False
    return bool(local_info["exists"] and remote_info["exists"])

513 

514 

async def _get_storage_file_info(
    storage: t.Any,
    bucket: str,
    file_path: str,
) -> dict[str, t.Any]:
    """Return existence/size/mtime/hash metadata for a file in storage.

    NOTE(review): downloads the full file content in order to hash it, and
    returns it under ``"content"`` — fine for small text assets, heavy for
    large binaries.

    Returns:
        ``{"exists", "size", "mtime", "content_hash"}`` plus ``"content"``
        when the file exists, or ``"error"`` when the lookup failed.
    """
    try:
        bucket_obj = getattr(storage, bucket, None)

        # Lazily create the bucket on first use (relies on the adapter's
        # private _create_bucket API — TODO confirm this is stable).
        if not bucket_obj:
            await storage._create_bucket(bucket)
            bucket_obj = getattr(storage, bucket)

        exists = await bucket_obj.exists(file_path)

        if not exists:
            return {
                "exists": False,
                "size": 0,
                "mtime": 0,
                "content_hash": None,
            }

        content = await bucket_obj.read(file_path)
        metadata = await bucket_obj.stat(file_path)

        import hashlib

        # blake2b hexdigest; presumably matches the hashing used for local
        # files by the strategies module — TODO confirm.
        content_hash = hashlib.blake2b(content).hexdigest()

        return {
            "exists": True,
            "size": len(content),
            "mtime": metadata.get("mtime", 0),
            "content_hash": content_hash,
            "content": content,
        }

    except Exception as e:
        # Any storage failure is reported as "missing" with the error text.
        debug(f"Error getting storage file info for {file_path}: {e}")
        return {
            "exists": False,
            "size": 0,
            "mtime": 0,
            "content_hash": None,
            "error": str(e),
        }

561 

562 

async def _pull_static(
    local_path: AsyncPath,
    storage: t.Any,
    cache: t.Any,
    bucket: str,
    storage_path: str,
    strategy: SyncStrategy,
    is_cacheable: bool,
    result: dict[str, t.Any],
) -> None:
    """Download one file from storage to disk, optionally caching it.

    Honors ``strategy.dry_run`` (records the would-be action without I/O)
    and ``strategy.backup_on_conflict`` (backs up an existing local file
    before overwriting). Errors are recorded, never raised.
    """
    try:
        bucket_obj = getattr(storage, bucket)

        if strategy.dry_run:
            debug(f"DRY RUN: Would pull {storage_path} to {local_path}")
            result["synced"].append(f"PULL(dry-run): {storage_path}")
            return

        # Preserve the existing local file before overwriting it.
        if await local_path.exists() and strategy.backup_on_conflict:
            backup_path = await create_backup(Path(local_path))
            result["backed_up"].append(str(backup_path))

        content = await bucket_obj.read(storage_path)

        # Ensure the destination directory tree exists before writing.
        await local_path.parent.mkdir(parents=True, exist_ok=True)
        await local_path.write_bytes(content)

        result["synced"].append(f"PULL: {storage_path}")
        debug(f"Pulled static file from storage: {storage_path}")

        # Only text-like assets get mirrored into the cache layer.
        if is_cacheable and cache:
            await _cache_static_file(cache, storage_path, content, result)

    except Exception as e:
        result["errors"].append(e)
        debug(f"Error pulling static file {storage_path}: {e}")

599 

600 

async def _push_static(
    local_path: AsyncPath,
    storage: t.Any,
    cache: t.Any,
    bucket: str,
    storage_path: str,
    strategy: SyncStrategy,
    mime_type: str,
    is_cacheable: bool,
    result: dict[str, t.Any],
) -> None:
    """Upload one local file to storage, optionally caching it afterwards.

    Honors ``strategy.dry_run``. Errors are recorded, never raised.
    """
    try:
        bucket_obj = getattr(storage, bucket)

        if strategy.dry_run:
            debug(f"DRY RUN: Would push {local_path} to {storage_path}")
            result["synced"].append(f"PUSH(dry-run): {storage_path}")
            return

        payload = await local_path.read_bytes()

        # Attach the detected MIME type so storage serves it correctly.
        await bucket_obj.write(
            storage_path, payload, metadata={"content_type": mime_type}
        )

        result["synced"].append(f"PUSH: {storage_path}")
        debug(f"Pushed static file to storage: {storage_path} (MIME: {mime_type})")

        # Only text-like assets get mirrored into the cache layer.
        if is_cacheable and cache:
            await _cache_static_file(cache, storage_path, payload, result)

    except Exception as e:
        result["errors"].append(e)
        debug(f"Error pushing static file {storage_path}: {e}")

634 

635 

async def _handle_static_conflict(
    local_path: AsyncPath,
    storage: t.Any,
    cache: t.Any,
    bucket: str,
    storage_path: str,
    local_info: dict[str, t.Any],
    remote_info: dict[str, t.Any],
    strategy: SyncStrategy,
    mime_type: str,
    is_cacheable: bool,
    result: dict[str, t.Any],
) -> None:
    """Resolve a both-sides-exist conflict per the configured strategy.

    MANUAL strategy just records the conflict. Otherwise ``resolve_conflict``
    picks winning content; if the remote content won it is written locally,
    else the local content is pushed to storage. Resolution failures are
    recorded both as errors and as unresolved conflicts.
    """
    try:
        if strategy.conflict_strategy == ConflictStrategy.MANUAL:
            # Leave both sides untouched; surface the conflict for a human.
            result["conflicts"].append(
                {
                    "path": storage_path,
                    "local_mtime": local_info["mtime"],
                    "remote_mtime": remote_info["mtime"],
                    "reason": "manual_resolution_required",
                },
            )
            return

        # Pick winning content. NOTE(review): assumes local_info carries a
        # "content" key from get_file_info — TODO confirm against strategies.
        resolved_content, resolution_reason = await resolve_conflict(
            Path(local_path),
            remote_info["content"],
            local_info["content"],
            strategy.conflict_strategy,
            local_info["mtime"],
            remote_info["mtime"],
        )

        if strategy.dry_run:
            debug(
                f"DRY RUN: Would resolve conflict for {storage_path}: {resolution_reason}",
            )
            result["synced"].append(
                f"CONFLICT(dry-run): {storage_path} - {resolution_reason}",
            )
            return

        # Back up the local file before either side is overwritten.
        if (
            strategy.backup_on_conflict
            or strategy.conflict_strategy == ConflictStrategy.BACKUP_BOTH
        ):
            backup_path = await create_backup(Path(local_path), "conflict")
            result["backed_up"].append(str(backup_path))

        if resolved_content == remote_info["content"]:
            # Remote won: bring the local file up to date.
            await local_path.write_bytes(resolved_content)
            result["synced"].append(
                f"CONFLICT->REMOTE: {storage_path} - {resolution_reason}",
            )
        else:
            # Local won: push the winning content to storage.
            bucket_obj = getattr(storage, bucket)
            metadata = {"content_type": mime_type}
            await bucket_obj.write(storage_path, resolved_content, metadata=metadata)
            result["synced"].append(
                f"CONFLICT->LOCAL: {storage_path} - {resolution_reason}",
            )

        # Keep the cache consistent with whichever content won.
        if is_cacheable and cache:
            await _cache_static_file(cache, storage_path, resolved_content, result)

        debug(f"Resolved static conflict: {storage_path} - {resolution_reason}")

    except Exception as e:
        result["errors"].append(e)
        result["conflicts"].append(
            {
                "path": storage_path,
                "error": str(e),
                "reason": "resolution_failed",
            },
        )

713 

714 

async def _cache_static_file(
    cache: t.Any,
    storage_path: str,
    content: bytes,
    result: dict[str, t.Any],
) -> None:
    """Best-effort: store *content* in the cache under ``static:<path>``.

    Cache failures are logged and deliberately swallowed — a cache miss is
    recoverable, so caching must never fail the sync itself.

    Args:
        cache: Cache adapter exposing ``await set(key, value, ttl=...)``;
            a falsy value disables caching entirely.
        storage_path: Storage-relative path used to derive the cache key.
        content: Raw file bytes to cache.
        result: Per-file outcome dict; the key is appended to
            ``result["cache_invalidated"]`` on success.
    """
    if not cache:
        return

    try:
        cache_key = f"static:{storage_path}"
        # 24-hour TTL keeps entries fresh without hammering storage.
        await cache.set(cache_key, content, ttl=86400)
        result["cache_invalidated"].append(cache_key)
        debug(f"Cached static file: {storage_path}")
    except Exception as e:
        # Deliberate swallow (removed a dead trailing `pass`): log only.
        debug(f"Error caching static file {storage_path}: {e}")

733 

734 

async def _validate_cache_dependencies() -> tuple[t.Any, t.Any, dict[str, t.Any]]:
    """Resolve cache and storage adapters for cache warming.

    Returns ``(cache, storage, result)``; when either adapter is missing,
    returns ``(None, None, result)`` with the error recorded on *result*.
    """
    from acb.depends import depends

    cache = depends.get("cache")
    storage = depends.get("storage")
    result: dict[str, t.Any] = {"warmed": [], "errors": [], "skipped": []}

    if cache and storage:
        return cache, storage, result

    result["errors"].append(Exception("Cache or storage not available"))
    return None, None, result

752 

753 

async def _warm_single_static_file(
    static_path: str,
    cache: t.Any,
    storage: t.Any,
    cache_namespace: str,
    result: dict[str, t.Any],
) -> None:
    """Load one static file from storage into the cache, unless already warm."""
    try:
        # Binary assets are never cached; record why they were skipped.
        if not _is_cacheable_file(AsyncPath(static_path)):
            result["skipped"].append(f"{static_path} (not cacheable)")
            return

        cache_key = f"{cache_namespace}:{static_path}"
        if await cache.exists(cache_key):
            # Already warm — nothing to do.
            result["skipped"].append(static_path)
            return

        payload = await storage.static.read(static_path)
        await cache.set(cache_key, payload, ttl=86400)
        result["warmed"].append(static_path)

        debug(f"Warmed cache for static file: {static_path}")

    except Exception as e:
        result["errors"].append(f"{static_path}: {e}")
        debug(f"Error warming cache for static file {static_path}: {e}")

781 

782 

async def warm_static_cache(
    static_paths: list[str] | None = None,
    cache_namespace: str = "static",
) -> dict[str, t.Any]:
    """Pre-populate the cache for a set of static files.

    Defaults to the core CSS/JS bundles when *static_paths* is empty.
    Returns a dict with ``warmed``, ``errors``, and ``skipped`` lists.
    """
    result: dict[str, t.Any] = {"warmed": [], "errors": [], "skipped": []}

    # Default to the assets most likely requested on first page load.
    if not static_paths:
        static_paths = [
            "css/main.css",
            "css/app.css",
            "js/main.js",
            "js/app.js",
        ]

    try:
        cache, storage, dep_result = await _validate_cache_dependencies()
        if not cache or not storage:
            return dep_result

        result = dep_result
        for path in static_paths:
            await _warm_single_static_file(
                path, cache, storage, cache_namespace, result
            )

    except Exception as e:
        result["errors"].append(str(e))
        debug(f"Error in warm_static_cache: {e}")

    return result

818 

819 

async def get_static_sync_status(
    static_path: AsyncPath | None = None,
    storage_bucket: str = "static",
) -> dict[str, t.Any]:
    """Report how local static files compare to their storage copies.

    Returns aggregate counters plus a per-file ``details`` list; failures
    are reported under an ``error`` key instead of raising.
    """
    if static_path is None:
        static_path = AsyncPath("static")

    status: dict[str, t.Any] = {
        "total_static_files": 0,
        "in_sync": 0,
        "out_of_sync": 0,
        "local_only": 0,
        "remote_only": 0,
        "conflicts": 0,
        "details": [],
    }

    try:
        storage = await _get_storage_adapter()
        if not storage:
            status["error"] = "Storage adapter not available"
            return status

        patterns = [
            "*.css", "*.js", "*.png", "*.jpg", "*.jpeg", "*.gif", "*.svg", "*.ico",
        ]
        excludes = ["*.tmp", "*.log", ".*"]
        static_files = await _discover_static_files(static_path, patterns, excludes)
        status["total_static_files"] = len(static_files)

        await _process_static_files(static_files, storage, storage_bucket, status)

        # Everything not in sync falls into exactly one of these buckets.
        status["out_of_sync"] = (
            status["conflicts"] + status["local_only"] + status["remote_only"]
        )

    except Exception as e:
        status["error"] = str(e)
        debug(f"Error getting static sync status: {e}")

    return status

861 

862 

async def _get_storage_adapter() -> t.Any:
    """Resolve the storage adapter from the dependency container."""
    from acb.depends import depends

    storage = depends.get("storage")
    return storage

868 

869 

async def _process_static_files(
    static_files: list[dict[str, t.Any]],
    storage: t.Any,
    storage_bucket: str,
    status: dict[str, t.Any],
) -> None:
    """Compare each discovered file against storage and record its status."""
    for entry in static_files:
        local_info = await get_file_info(Path(entry["local_path"]))
        remote_info = await _get_storage_file_info(
            storage,
            storage_bucket,
            entry["storage_path"],
        )

        detail = _create_file_status(entry, local_info, remote_info)
        _update_status_counters(local_info, remote_info, detail, status)
        status["details"].append(detail)

888 

889 

890def _create_file_status( 

891 static_info: dict[str, t.Any], 

892 local_info: dict[str, t.Any], 

893 remote_info: dict[str, t.Any], 

894) -> dict[str, t.Any]: 

895 """Create file status dictionary.""" 

896 file_status: dict[str, t.Any] = { 

897 "path": static_info["storage_path"], 

898 "mime_type": static_info["mime_type"], 

899 "local_exists": local_info["exists"], 

900 "remote_exists": remote_info["exists"], 

901 } 

902 

903 # Determine sync status 

904 if local_info["exists"] and remote_info["exists"]: 

905 if local_info["content_hash"] == remote_info["content_hash"]: 

906 file_status["status"] = "in_sync" 

907 else: 

908 file_status["status"] = "conflict" 

909 file_status["local_mtime"] = local_info["mtime"] 

910 file_status["remote_mtime"] = remote_info["mtime"] 

911 elif local_info["exists"]: 

912 file_status["status"] = "local_only" 

913 elif remote_info["exists"]: 

914 file_status["status"] = "remote_only" 

915 else: 

916 file_status["status"] = "missing" 

917 

918 return file_status 

919 

920 

921def _update_status_counters( 

922 local_info: dict[str, t.Any], 

923 remote_info: dict[str, t.Any], 

924 file_status: dict[str, t.Any], 

925 status: dict[str, t.Any], 

926) -> None: 

927 """Update status counters based on file status.""" 

928 if local_info["exists"] and remote_info["exists"]: 

929 if local_info["content_hash"] == remote_info["content_hash"]: 

930 status["in_sync"] += 1 

931 else: 

932 status["conflicts"] += 1 

933 elif local_info["exists"]: 

934 status["local_only"] += 1 

935 elif remote_info["exists"]: 

936 status["remote_only"] += 1 

937 

938 

async def backup_static_files(
    static_path: AsyncPath | None = None,
    backup_suffix: str | None = None,
) -> dict[str, t.Any]:
    """Create suffixed backup copies of every static asset under *static_path*.

    Returns a dict with ``backed_up``, ``errors``, and ``skipped`` lists;
    errors are recorded rather than raised.
    """
    static_path = static_path or AsyncPath("static")
    backup_suffix = backup_suffix or _generate_backup_suffix()

    result = _create_backup_result()

    try:
        if not await static_path.exists():
            result["errors"].append(f"Static path does not exist: {static_path}")
            return result

        await _backup_static_files_with_patterns(static_path, backup_suffix, result)
    except Exception as e:
        result["errors"].append(str(e))
        debug(f"Error in backup_static_files: {e}")

    return result

960 

961 

962def _generate_backup_suffix() -> str: 

963 import time 

964 

965 timestamp = int(time.time()) 

966 return f"backup_{timestamp}" 

967 

968 

969def _create_backup_result() -> dict[str, t.Any]: 

970 return { 

971 "backed_up": [], 

972 "errors": [], 

973 "skipped": [], 

974 } 

975 

976 

async def _backup_static_files_with_patterns(
    static_path: AsyncPath,
    backup_suffix: str,
    result: dict[str, t.Any],
) -> None:
    """Back up files matching every known static-asset glob pattern."""
    asset_globs = (
        "*.css", "*.js",
        "*.png", "*.jpg", "*.jpeg", "*.gif", "*.svg", "*.ico",
        "*.woff", "*.woff2", "*.ttf", "*.eot", "*.otf",
        "*.webp", "*.avif",
    )
    for glob in asset_globs:
        await _backup_files_with_pattern(static_path, glob, backup_suffix, result)

1002 

1003 

async def _backup_files_with_pattern(
    static_path: AsyncPath,
    pattern: str,
    backup_suffix: str,
    result: dict[str, t.Any],
) -> None:
    """Back up every regular file under *static_path* matching *pattern*."""
    async for candidate in static_path.rglob(pattern):
        if not await candidate.is_file():
            continue
        await _backup_single_file(candidate, backup_suffix, result)

1013 

1014 

async def _backup_single_file(
    file_path: AsyncPath,
    backup_suffix: str,
    result: dict[str, t.Any],
) -> None:
    """Back up one file, recording either the backup path or the failure."""
    try:
        backup_path = await create_backup(Path(file_path), backup_suffix)
    except Exception as e:
        result["errors"].append(f"{file_path}: {e}")
    else:
        result["backed_up"].append(str(backup_path))