Coverage for fastblocks/actions/sync/static.py: 13%
381 statements
« prev ^ index » next — coverage.py v7.10.7, created at 2025-10-09 00:47 -0700
1"""Static files synchronization between filesystem, cloud storage, and selective caching.
3Static sync uses selective caching based on file type:
4- Text-based files (CSS, JS, MD, TXT) are cached for performance
5- Binary files (images, fonts, media) sync to storage only to avoid cache bloat
6"""
8import typing as t
9from pathlib import Path
11import yaml
12from acb.debug import debug
13from anyio import Path as AsyncPath
15from .strategies import (
16 ConflictStrategy,
17 SyncDirection,
18 SyncResult,
19 SyncStrategy,
20 create_backup,
21 get_file_info,
22 resolve_conflict,
23 should_sync,
24)
class StaticSyncResult(SyncResult):
    """Sync result extended with static-asset bookkeeping.

    Tracks which assets were processed, their detected MIME types, and how
    the cache was affected (invalidated/cleared), split by cacheability.
    """

    def __init__(
        self,
        *,
        assets_processed: list[str] | None = None,
        mime_types_detected: dict[str, str] | None = None,
        cache_invalidated: list[str] | None = None,
        cache_cleared: list[str] | None = None,
        cacheable_assets: list[str] | None = None,
        non_cacheable_assets: list[str] | None = None,
        **kwargs: t.Any,
    ) -> None:
        super().__init__(**kwargs)

        # Keep caller-supplied containers (so they stay shared); substitute a
        # fresh container only when the argument was omitted.
        def _lst(value: list[str] | None) -> list[str]:
            return [] if value is None else value

        self.assets_processed = _lst(assets_processed)
        self.mime_types_detected = (
            {} if mime_types_detected is None else mime_types_detected
        )
        self.cache_invalidated = _lst(cache_invalidated)
        self.cache_cleared = _lst(cache_cleared)
        self.cacheable_assets = _lst(cacheable_assets)
        self.non_cacheable_assets = _lst(non_cacheable_assets)
54CACHEABLE_EXTENSIONS = {".css", ".js", ".md", ".txt"}
55NON_CACHEABLE_EXTENSIONS = {
56 ".png",
57 ".jpg",
58 ".jpeg",
59 ".gif",
60 ".svg",
61 ".ico",
62 ".webp",
63 ".avif",
64 ".mp4",
65 ".mov",
66 ".mp3",
67 ".wav",
68 ".pdf",
69 ".zip",
70 ".woff",
71 ".woff2",
72 ".ttf",
73 ".eot",
74 ".otf",
75}
78def _is_cacheable_file(file_path: AsyncPath) -> bool:
79 return file_path.suffix.lower() in CACHEABLE_EXTENSIONS
async def sync_static(
    *,
    static_path: AsyncPath | None = None,
    file_patterns: list[str] | None = None,
    strategy: SyncStrategy | None = None,
    storage_bucket: str | None = None,
    exclude_patterns: list[str] | None = None,
) -> StaticSyncResult:
    """Synchronize static files between the local filesystem and cloud storage.

    Args:
        static_path: Root directory to scan (defaults to ``static``).
        file_patterns: Glob patterns selecting files to sync; defaults cover
            common web-asset extensions.
        strategy: Sync strategy (direction, conflict handling, dry-run);
            defaults to ``SyncStrategy()``.
        storage_bucket: Target bucket name; when None the bucket is read from
            ``settings/storage.yml`` (falling back to ``"static"``).
        exclude_patterns: Glob patterns for files to skip.

    Returns:
        StaticSyncResult with synced items, conflicts, errors, and
        cacheable/non-cacheable asset bookkeeping. Failures are recorded on
        the result rather than raised.
    """
    config = _prepare_static_sync_config(
        static_path, file_patterns, strategy, exclude_patterns
    )
    result = StaticSyncResult()

    if storage_bucket is None:
        storage_bucket = await _get_default_static_bucket()

    # Adapter lookup failures are recorded on result.errors; bail out early.
    adapters = await _initialize_adapters(result)
    if not adapters:
        return result

    static_files = await _discover_static_files(
        config["static_path"],
        config["file_patterns"],
        config["exclude_patterns"],
    )
    if not static_files:
        debug("No static files found to sync")
        return result

    debug(f"Found {len(static_files)} static files to sync")

    await _sync_static_files(
        static_files,
        adapters,
        config["strategy"],
        storage_bucket,
        result,
    )

    debug(
        f"Static sync completed: {len(result.synced_items)} synced, {len(result.conflicts)} conflicts",
    )

    return result
def _prepare_static_sync_config(
    static_path: AsyncPath | None,
    file_patterns: list[str] | None,
    strategy: SyncStrategy | None,
    exclude_patterns: list[str] | None,
) -> dict[str, t.Any]:
    """Fill in defaults for the static-sync options and bundle them in a dict."""
    # Default patterns cover the common web-asset extensions.
    default_patterns = [
        "*.css", "*.js",
        "*.png", "*.jpg", "*.jpeg", "*.gif", "*.svg", "*.ico",
        "*.woff", "*.woff2", "*.ttf", "*.eot", "*.otf",
        "*.webp", "*.avif",
        "*.pdf", "*.zip", "*.tar.gz",
    ]
    default_excludes = ["*.tmp", "*.log", ".*", "__pycache__"]
    return {
        "static_path": static_path or AsyncPath("static"),
        "file_patterns": file_patterns or default_patterns,
        "strategy": strategy or SyncStrategy(),
        "exclude_patterns": exclude_patterns or default_excludes,
    }
async def _initialize_adapters(result: StaticSyncResult) -> dict[str, t.Any] | None:
    """Resolve storage and cache adapters via DI, recording failures on *result*.

    Returns {"storage": ..., "cache": ...} on success, or None when the DI
    lookup raised or no storage adapter is available (cache is optional).
    """
    try:
        from acb.depends import depends

        storage = depends.get("storage")
        cache = depends.get("cache")
        if not storage:
            # Storage is mandatory for any sync work.
            result.errors.append(Exception("Storage adapter not available"))
            return None
    except Exception as e:
        result.errors.append(e)
        return None
    return {"storage": storage, "cache": cache}
async def _get_default_static_bucket() -> str:
    """Read the static bucket name from settings/storage.yml, defaulting to "static".

    Any error (missing/invalid YAML, I/O failure) falls through to the
    "static" default rather than raising.
    """
    try:
        config_path = AsyncPath("settings/storage.yml")
        if await config_path.exists():
            parsed = yaml.safe_load(await config_path.read_text())
            bucket_name = "static"
            if isinstance(parsed, dict):
                bucket_name = t.cast(
                    str, parsed.get("buckets", {}).get("static", "static")
                )
            debug(f"Using static bucket from config: {bucket_name}")
            return bucket_name
    except Exception as e:
        debug(f"Could not load storage config, using default: {e}")

    debug("Using fallback static bucket: static")
    return "static"
async def _discover_static_files(
    static_path: AsyncPath,
    file_patterns: list[str],
    exclude_patterns: list[str],
) -> list[dict[str, t.Any]]:
    """Collect metadata records for every file under *static_path* matching *file_patterns*."""
    discovered: list[dict[str, t.Any]] = []

    if not await static_path.exists():
        debug(f"Static path does not exist: {static_path}")
        return discovered

    # Each pattern appends its matches into the shared list.
    for pattern in file_patterns:
        await _discover_files_with_pattern(
            static_path, pattern, exclude_patterns, discovered
        )

    return discovered
async def _discover_files_with_pattern(
    static_path: AsyncPath,
    pattern: str,
    exclude_patterns: list[str],
    static_files: list[dict[str, t.Any]],
) -> None:
    """Append metadata for non-excluded regular files matching *pattern*."""
    async for candidate in static_path.rglob(pattern):
        if not await candidate.is_file():
            continue
        if _should_exclude_file(candidate, exclude_patterns):
            continue
        await _process_static_file(candidate, static_path, static_files)
238def _should_exclude_file(file_path: AsyncPath, exclude_patterns: list[str]) -> bool:
239 import fnmatch
241 file_name = file_path.name
242 relative_path = str(file_path)
243 for pattern in exclude_patterns:
244 if fnmatch.fnmatch(file_name, pattern) or fnmatch.fnmatch(
245 relative_path, pattern
246 ):
247 return True
249 return False
async def _process_static_file(
    file_path: AsyncPath,
    static_path: AsyncPath,
    static_files: list[dict[str, t.Any]],
) -> None:
    """Build the metadata record for one discovered file and append it."""
    try:
        rel_path = file_path.relative_to(static_path)
    except ValueError:
        # rglob should only yield descendants, but guard against odd paths.
        debug(f"Could not get relative path for {file_path}")
        return

    static_files.append(
        {
            "local_path": file_path,
            "relative_path": rel_path,
            "storage_path": str(rel_path),
            "mime_type": _detect_mime_type(file_path),
            "is_cacheable": _is_cacheable_file(file_path),
        },
    )
275def _detect_mime_type(file_path: AsyncPath) -> str:
276 import mimetypes
278 mime_type, _ = mimetypes.guess_type(str(file_path))
279 return mime_type or "application/octet-stream"
async def _sync_static_files(
    static_files: list[dict[str, t.Any]],
    adapters: dict[str, t.Any],
    strategy: SyncStrategy,
    storage_bucket: str,
    result: StaticSyncResult,
) -> None:
    """Sync each discovered file and accumulate outcomes onto *result*.

    Per-file exceptions are recorded on ``result.errors`` so one failure does
    not abort the remaining files.
    """
    for static_info in static_files:
        try:
            file_result = await _sync_single_static_file(
                static_info,
                adapters["storage"],
                adapters["cache"],
                strategy,
                storage_bucket,
            )
            _accumulate_static_sync_results(file_result, result)

            # Only files that actually synced count toward asset bookkeeping.
            if file_result.get("synced"):
                result.assets_processed.append(static_info["storage_path"])
                result.mime_types_detected[static_info["storage_path"]] = static_info[
                    "mime_type"
                ]

                if static_info["is_cacheable"]:
                    result.cacheable_assets.append(static_info["storage_path"])
                else:
                    result.non_cacheable_assets.append(static_info["storage_path"])

        except Exception as e:
            result.errors.append(e)
            debug(f"Error syncing static file {static_info['relative_path']}: {e}")
def _accumulate_static_sync_results(
    file_result: dict[str, t.Any],
    result: StaticSyncResult,
) -> None:
    """Merge one file's outcome dict into the aggregate StaticSyncResult."""
    # Map each file-result key to the result attribute it feeds; only
    # "synced" has a differently named attribute.
    key_to_attr = (
        ("synced", "synced_items"),
        ("conflicts", "conflicts"),
        ("errors", "errors"),
        ("skipped", "skipped"),
        ("backed_up", "backed_up"),
        ("cache_invalidated", "cache_invalidated"),
        ("cache_cleared", "cache_cleared"),
    )
    for key, attr in key_to_attr:
        values = file_result.get(key)
        if values:
            getattr(result, attr).extend(values)
async def _sync_single_static_file(
    static_info: dict[str, t.Any],
    storage: t.Any,
    cache: t.Any,
    strategy: SyncStrategy,
    storage_bucket: str,
) -> dict[str, t.Any]:
    """Sync one static file and return its per-file outcome dict.

    The returned dict has the buckets produced by ``_create_sync_result``
    ("synced", "conflicts", "errors", "skipped", "backed_up",
    "cache_invalidated", "cache_cleared"). Exceptions are captured into
    ``result["errors"]`` rather than propagated.
    """
    local_path = static_info["local_path"]
    storage_path = static_info["storage_path"]
    mime_type = static_info["mime_type"]
    is_cacheable = static_info["is_cacheable"]

    result = _create_sync_result()

    try:
        # Compare local and remote metadata to decide whether a sync is needed.
        local_info, remote_info = await _get_file_infos(
            local_path,
            storage,
            storage_bucket,
            storage_path,
        )

        if not await _should_sync_file(
            local_info,
            remote_info,
            strategy,
            storage_path,
            result,
        ):
            return result

        await _execute_sync_operation(
            local_path,
            storage,
            cache,
            storage_bucket,
            storage_path,
            local_info,
            remote_info,
            strategy,
            mime_type,
            is_cacheable,
            result,
        )

    except Exception as e:
        result["errors"].append(e)
        debug(f"Error in _sync_single_static_file for {storage_path}: {e}")

    return result
388def _create_sync_result() -> dict[str, t.Any]:
389 return {
390 "synced": [],
391 "conflicts": [],
392 "errors": [],
393 "skipped": [],
394 "backed_up": [],
395 "cache_invalidated": [],
396 "cache_cleared": [],
397 }
async def _get_file_infos(
    local_path: t.Any,
    storage: t.Any,
    storage_bucket: str,
    storage_path: str,
) -> tuple[dict[str, t.Any], dict[str, t.Any]]:
    """Fetch (local, remote) metadata dicts for one asset."""
    local = await get_file_info(Path(local_path))
    remote = await _get_storage_file_info(storage, storage_bucket, storage_path)
    return local, remote
async def _should_sync_file(
    local_info: dict[str, t.Any],
    remote_info: dict[str, t.Any],
    strategy: SyncStrategy,
    storage_path: str,
    result: dict[str, t.Any],
) -> bool:
    """Decide whether the file needs syncing; record skips on *result*."""
    needed, reason = should_sync(local_info, remote_info, strategy.direction)
    if needed:
        debug(f"Syncing static file {storage_path}: {reason}")
        return True

    # Not syncing: keep the reason alongside the path for reporting.
    result["skipped"].append(f"{storage_path} ({reason})")
    return False
async def _execute_sync_operation(
    local_path: t.Any,
    storage: t.Any,
    cache: t.Any,
    storage_bucket: str,
    storage_path: str,
    local_info: dict[str, t.Any],
    remote_info: dict[str, t.Any],
    strategy: SyncStrategy,
    mime_type: str,
    is_cacheable: bool,
    result: dict[str, t.Any],
) -> None:
    """Dispatch to pull, push, or conflict handling based on strategy and state.

    The three predicates are checked in order; when none match (e.g. neither
    side exists under bidirectional sync) the file is left untouched.
    """
    if _should_pull_static(strategy, local_info, remote_info):
        # Remote copy wins: download it over the local file.
        await _pull_static(
            local_path,
            storage,
            cache,
            storage_bucket,
            storage_path,
            strategy,
            is_cacheable,
            result,
        )
    elif _should_push_static(strategy, local_info, remote_info):
        # Local copy wins: upload it to storage.
        await _push_static(
            local_path,
            storage,
            cache,
            storage_bucket,
            storage_path,
            strategy,
            mime_type,
            is_cacheable,
            result,
        )
    elif _has_bidirectional_conflict(strategy, local_info, remote_info):
        # Both sides exist under bidirectional sync: resolve per strategy.
        await _handle_static_conflict(
            local_path,
            storage,
            cache,
            storage_bucket,
            storage_path,
            local_info,
            remote_info,
            strategy,
            mime_type,
            is_cacheable,
            result,
        )
def _should_pull_static(
    strategy: SyncStrategy,
    local_info: dict[str, t.Any],
    remote_info: dict[str, t.Any],
) -> bool:
    """True when the remote copy should overwrite the local one."""
    if strategy.direction == SyncDirection.PULL:
        return True
    if strategy.direction != SyncDirection.BIDIRECTIONAL:
        return False
    if not remote_info["exists"]:
        return False
    # Pull when there is no local copy, or the remote copy is newer.
    return not local_info["exists"] or remote_info["mtime"] > local_info["mtime"]
def _should_push_static(
    strategy: SyncStrategy,
    local_info: dict[str, t.Any],
    remote_info: dict[str, t.Any],
) -> bool:
    """True when the local copy should be uploaded to storage."""
    if strategy.direction == SyncDirection.PUSH:
        return True
    if strategy.direction != SyncDirection.BIDIRECTIONAL:
        return False
    if not local_info["exists"]:
        return False
    # Push when there is no remote copy, or the local copy is newer.
    return not remote_info["exists"] or local_info["mtime"] > remote_info["mtime"]
def _has_bidirectional_conflict(
    strategy: SyncStrategy,
    local_info: dict[str, t.Any],
    remote_info: dict[str, t.Any],
) -> bool:
    """True when both copies exist under bidirectional sync and need resolution."""
    if strategy.direction != SyncDirection.BIDIRECTIONAL:
        return False
    return local_info["exists"] and remote_info["exists"]
async def _get_storage_file_info(
    storage: t.Any,
    bucket: str,
    file_path: str,
) -> dict[str, t.Any]:
    """Return metadata for *file_path* in the storage *bucket*.

    The returned dict always carries "exists", "size", "mtime", and
    "content_hash"; when the object exists its raw bytes are also included
    under "content" so conflict resolution can reuse them without a second
    read. Errors are reported via an "error" key instead of raising.
    """
    try:
        bucket_obj = getattr(storage, bucket, None)

        if not bucket_obj:
            # NOTE(review): relies on the storage adapter's private
            # _create_bucket API — confirm this is a supported entry point.
            await storage._create_bucket(bucket)
            bucket_obj = getattr(storage, bucket)

        exists = await bucket_obj.exists(file_path)

        if not exists:
            return {
                "exists": False,
                "size": 0,
                "mtime": 0,
                "content_hash": None,
            }

        # The full object is downloaded so it can be hashed and reused.
        content = await bucket_obj.read(file_path)
        metadata = await bucket_obj.stat(file_path)

        import hashlib

        content_hash = hashlib.blake2b(content).hexdigest()

        return {
            "exists": True,
            "size": len(content),
            "mtime": metadata.get("mtime", 0),
            "content_hash": content_hash,
            "content": content,
        }

    except Exception as e:
        debug(f"Error getting storage file info for {file_path}: {e}")
        return {
            "exists": False,
            "size": 0,
            "mtime": 0,
            "content_hash": None,
            "error": str(e),
        }
async def _pull_static(
    local_path: AsyncPath,
    storage: t.Any,
    cache: t.Any,
    bucket: str,
    storage_path: str,
    strategy: SyncStrategy,
    is_cacheable: bool,
    result: dict[str, t.Any],
) -> None:
    """Copy one file from storage to the local filesystem (remote wins).

    Errors are appended to ``result["errors"]`` rather than raised.
    """
    try:
        bucket_obj = getattr(storage, bucket)

        if strategy.dry_run:
            debug(f"DRY RUN: Would pull {storage_path} to {local_path}")
            result["synced"].append(f"PULL(dry-run): {storage_path}")
            return

        # Preserve the current local copy before overwriting it.
        if strategy.backup_on_conflict and await local_path.exists():
            backup = await create_backup(Path(local_path))
            result["backed_up"].append(str(backup))

        content = await bucket_obj.read(storage_path)
        await local_path.parent.mkdir(parents=True, exist_ok=True)
        await local_path.write_bytes(content)

        result["synced"].append(f"PULL: {storage_path}")
        debug(f"Pulled static file from storage: {storage_path}")

        if cache and is_cacheable:
            await _cache_static_file(cache, storage_path, content, result)

    except Exception as e:
        result["errors"].append(e)
        debug(f"Error pulling static file {storage_path}: {e}")
async def _push_static(
    local_path: AsyncPath,
    storage: t.Any,
    cache: t.Any,
    bucket: str,
    storage_path: str,
    strategy: SyncStrategy,
    mime_type: str,
    is_cacheable: bool,
    result: dict[str, t.Any],
) -> None:
    """Upload one local file to storage (local wins), caching it when eligible.

    Errors are appended to ``result["errors"]`` rather than raised.
    """
    try:
        bucket_obj = getattr(storage, bucket)

        if strategy.dry_run:
            debug(f"DRY RUN: Would push {local_path} to {storage_path}")
            result["synced"].append(f"PUSH(dry-run): {storage_path}")
            return

        content = await local_path.read_bytes()
        # Record the MIME type so the storage backend serves correct headers.
        await bucket_obj.write(
            storage_path, content, metadata={"content_type": mime_type}
        )

        result["synced"].append(f"PUSH: {storage_path}")
        debug(f"Pushed static file to storage: {storage_path} (MIME: {mime_type})")

        if cache and is_cacheable:
            await _cache_static_file(cache, storage_path, content, result)

    except Exception as e:
        result["errors"].append(e)
        debug(f"Error pushing static file {storage_path}: {e}")
async def _handle_static_conflict(
    local_path: AsyncPath,
    storage: t.Any,
    cache: t.Any,
    bucket: str,
    storage_path: str,
    local_info: dict[str, t.Any],
    remote_info: dict[str, t.Any],
    strategy: SyncStrategy,
    mime_type: str,
    is_cacheable: bool,
    result: dict[str, t.Any],
) -> None:
    """Resolve a bidirectional conflict where both copies exist.

    The MANUAL strategy just records the conflict; any other strategy picks
    the winning bytes via ``resolve_conflict`` and writes them to the losing
    side. Failures are recorded under both "errors" and "conflicts".
    """
    try:
        if strategy.conflict_strategy == ConflictStrategy.MANUAL:
            # Leave resolution to a human: record both timestamps and stop.
            result["conflicts"].append(
                {
                    "path": storage_path,
                    "local_mtime": local_info["mtime"],
                    "remote_mtime": remote_info["mtime"],
                    "reason": "manual_resolution_required",
                },
            )
            return

        # NOTE(review): assumes get_file_info populated local_info["content"]
        # for existing files — confirm against strategies.get_file_info.
        resolved_content, resolution_reason = await resolve_conflict(
            Path(local_path),
            remote_info["content"],
            local_info["content"],
            strategy.conflict_strategy,
            local_info["mtime"],
            remote_info["mtime"],
        )

        if strategy.dry_run:
            debug(
                f"DRY RUN: Would resolve conflict for {storage_path}: {resolution_reason}",
            )
            result["synced"].append(
                f"CONFLICT(dry-run): {storage_path} - {resolution_reason}",
            )
            return

        if (
            strategy.backup_on_conflict
            or strategy.conflict_strategy == ConflictStrategy.BACKUP_BOTH
        ):
            backup_path = await create_backup(Path(local_path), "conflict")
            result["backed_up"].append(str(backup_path))

        # If the resolver chose the remote bytes, overwrite the local file;
        # otherwise the local (or merged) content is pushed to storage.
        if resolved_content == remote_info["content"]:
            await local_path.write_bytes(resolved_content)
            result["synced"].append(
                f"CONFLICT->REMOTE: {storage_path} - {resolution_reason}",
            )
        else:
            bucket_obj = getattr(storage, bucket)
            metadata = {"content_type": mime_type}
            await bucket_obj.write(storage_path, resolved_content, metadata=metadata)
            result["synced"].append(
                f"CONFLICT->LOCAL: {storage_path} - {resolution_reason}",
            )

        if is_cacheable and cache:
            await _cache_static_file(cache, storage_path, resolved_content, result)

        debug(f"Resolved static conflict: {storage_path} - {resolution_reason}")

    except Exception as e:
        result["errors"].append(e)
        result["conflicts"].append(
            {
                "path": storage_path,
                "error": str(e),
                "reason": "resolution_failed",
            },
        )
async def _cache_static_file(
    cache: t.Any,
    storage_path: str,
    content: bytes,
    result: dict[str, t.Any],
) -> None:
    """Store *content* in the cache under the static namespace (best effort).

    Cache failures are logged and swallowed — caching is an optimization,
    not a requirement.
    """
    if not cache:
        return

    cache_key = f"static:{storage_path}"
    try:
        # One-day TTL keeps assets warm without pinning stale content forever.
        await cache.set(cache_key, content, ttl=86400)
        result["cache_invalidated"].append(cache_key)
        debug(f"Cached static file: {storage_path}")
    except Exception as e:
        debug(f"Error caching static file {storage_path}: {e}")
async def _validate_cache_dependencies() -> tuple[t.Any, t.Any, dict[str, t.Any]]:
    """Resolve cache and storage adapters for cache warming.

    Returns (cache, storage, result) — both adapters are None and the result
    dict carries an error when either is unavailable.
    """
    from acb.depends import depends

    result: dict[str, t.Any] = {"warmed": [], "errors": [], "skipped": []}
    cache = depends.get("cache")
    storage = depends.get("storage")

    if cache and storage:
        return cache, storage, result

    result["errors"].append(Exception("Cache or storage not available"))
    return None, None, result
async def _warm_single_static_file(
    static_path: str,
    cache: t.Any,
    storage: t.Any,
    cache_namespace: str,
    result: dict[str, t.Any],
) -> None:
    """Warm the cache with one static asset, recording the outcome on *result*."""
    try:
        # Binary assets are never cached — skip them up front.
        if not _is_cacheable_file(AsyncPath(static_path)):
            result["skipped"].append(f"{static_path} (not cacheable)")
            return

        key = f"{cache_namespace}:{static_path}"
        if await cache.exists(key):
            # Already warm; nothing to do.
            result["skipped"].append(static_path)
            return

        # NOTE(review): reads from the hard-coded "static" bucket even when
        # cache_namespace differs — confirm intended.
        payload = await storage.static.read(static_path)
        await cache.set(key, payload, ttl=86400)
        result["warmed"].append(static_path)

        debug(f"Warmed cache for static file: {static_path}")

    except Exception as e:
        result["errors"].append(f"{static_path}: {e}")
        debug(f"Error warming cache for static file {static_path}: {e}")
async def warm_static_cache(
    static_paths: list[str] | None = None,
    cache_namespace: str = "static",
) -> dict[str, t.Any]:
    """Pre-populate the cache with commonly requested static assets.

    Args:
        static_paths: Storage paths to warm; defaults to the main CSS/JS files.
        cache_namespace: Prefix for the generated cache keys.

    Returns:
        Dict with "warmed", "errors", and "skipped" lists; failures are
        collected rather than raised.
    """
    result: dict[str, t.Any] = {"warmed": [], "errors": [], "skipped": []}

    # Default to the handful of assets most pages request.
    paths = static_paths or [
        "css/main.css",
        "css/app.css",
        "js/main.js",
        "js/app.js",
    ]

    try:
        cache, storage, dep_result = await _validate_cache_dependencies()
        if not (cache and storage):
            return dep_result

        result = dep_result
        for path in paths:
            await _warm_single_static_file(
                path, cache, storage, cache_namespace, result
            )

    except Exception as e:
        result["errors"].append(str(e))
        debug(f"Error in warm_static_cache: {e}")

    return result
async def get_static_sync_status(
    static_path: AsyncPath | None = None,
    storage_bucket: str = "static",
) -> dict[str, t.Any]:
    """Report how local static files compare to the storage bucket.

    Args:
        static_path: Root directory to scan (defaults to ``static``).
        storage_bucket: Storage bucket attribute name to compare against.

    Returns:
        Dict of counters ("total_static_files", "in_sync", "out_of_sync",
        "local_only", "remote_only", "conflicts") plus a per-file "details"
        list; an "error" key is set instead of raising on failure.
    """
    if static_path is None:
        static_path = AsyncPath("static")

    status: dict[str, t.Any] = {
        "total_static_files": 0,
        "in_sync": 0,
        "out_of_sync": 0,
        "local_only": 0,
        "remote_only": 0,
        "conflicts": 0,
        "details": [],
    }

    try:
        storage = await _get_storage_adapter()
        if not storage:
            status["error"] = "Storage adapter not available"
            return status

        # Status checks use a narrower pattern set than a full sync run.
        static_files = await _discover_static_files(
            static_path,
            ["*.css", "*.js", "*.png", "*.jpg", "*.jpeg", "*.gif", "*.svg", "*.ico"],
            ["*.tmp", "*.log", ".*"],
        )
        status["total_static_files"] = len(static_files)

        await _process_static_files(static_files, storage, storage_bucket, status)

        # Anything not in sync falls into exactly one of these three buckets.
        status["out_of_sync"] = (
            status["conflicts"] + status["local_only"] + status["remote_only"]
        )

    except Exception as e:
        status["error"] = str(e)
        debug(f"Error getting static sync status: {e}")

    return status
async def _get_storage_adapter() -> t.Any:
    """Resolve the storage adapter from the DI container."""
    from acb.depends import depends

    storage = depends.get("storage")
    return storage
async def _process_static_files(
    static_files: list[dict[str, t.Any]],
    storage: t.Any,
    storage_bucket: str,
    status: dict[str, t.Any],
) -> None:
    """Compare each discovered file against storage and record its status."""
    for info in static_files:
        local_state = await get_file_info(Path(info["local_path"]))
        remote_state = await _get_storage_file_info(
            storage,
            storage_bucket,
            info["storage_path"],
        )

        entry = _create_file_status(info, local_state, remote_state)
        _update_status_counters(local_state, remote_state, entry, status)
        status["details"].append(entry)
890def _create_file_status(
891 static_info: dict[str, t.Any],
892 local_info: dict[str, t.Any],
893 remote_info: dict[str, t.Any],
894) -> dict[str, t.Any]:
895 """Create file status dictionary."""
896 file_status: dict[str, t.Any] = {
897 "path": static_info["storage_path"],
898 "mime_type": static_info["mime_type"],
899 "local_exists": local_info["exists"],
900 "remote_exists": remote_info["exists"],
901 }
903 # Determine sync status
904 if local_info["exists"] and remote_info["exists"]:
905 if local_info["content_hash"] == remote_info["content_hash"]:
906 file_status["status"] = "in_sync"
907 else:
908 file_status["status"] = "conflict"
909 file_status["local_mtime"] = local_info["mtime"]
910 file_status["remote_mtime"] = remote_info["mtime"]
911 elif local_info["exists"]:
912 file_status["status"] = "local_only"
913 elif remote_info["exists"]:
914 file_status["status"] = "remote_only"
915 else:
916 file_status["status"] = "missing"
918 return file_status
921def _update_status_counters(
922 local_info: dict[str, t.Any],
923 remote_info: dict[str, t.Any],
924 file_status: dict[str, t.Any],
925 status: dict[str, t.Any],
926) -> None:
927 """Update status counters based on file status."""
928 if local_info["exists"] and remote_info["exists"]:
929 if local_info["content_hash"] == remote_info["content_hash"]:
930 status["in_sync"] += 1
931 else:
932 status["conflicts"] += 1
933 elif local_info["exists"]:
934 status["local_only"] += 1
935 elif remote_info["exists"]:
936 status["remote_only"] += 1
async def backup_static_files(
    static_path: AsyncPath | None = None,
    backup_suffix: str | None = None,
) -> dict[str, t.Any]:
    """Create backup copies of all static assets under *static_path*.

    Args:
        static_path: Root directory to back up (defaults to ``static``).
        backup_suffix: Suffix for backup names; defaults to a timestamped one.

    Returns:
        Dict with "backed_up", "errors", and "skipped" lists; failures are
        collected rather than raised.
    """
    root = static_path or AsyncPath("static")
    suffix = backup_suffix or _generate_backup_suffix()
    result = _create_backup_result()

    try:
        if await root.exists():
            await _backup_static_files_with_patterns(root, suffix, result)
        else:
            result["errors"].append(f"Static path does not exist: {root}")
    except Exception as e:
        result["errors"].append(str(e))
        debug(f"Error in backup_static_files: {e}")

    return result
962def _generate_backup_suffix() -> str:
963 import time
965 timestamp = int(time.time())
966 return f"backup_{timestamp}"
969def _create_backup_result() -> dict[str, t.Any]:
970 return {
971 "backed_up": [],
972 "errors": [],
973 "skipped": [],
974 }
async def _backup_static_files_with_patterns(
    static_path: AsyncPath,
    backup_suffix: str,
    result: dict[str, t.Any],
) -> None:
    """Back up every asset under *static_path* matching the known web-asset globs."""
    asset_patterns = (
        "*.css", "*.js",
        "*.png", "*.jpg", "*.jpeg", "*.gif", "*.svg", "*.ico",
        "*.woff", "*.woff2", "*.ttf", "*.eot", "*.otf",
        "*.webp", "*.avif",
    )
    for pattern in asset_patterns:
        await _backup_files_with_pattern(static_path, pattern, backup_suffix, result)
async def _backup_files_with_pattern(
    static_path: AsyncPath,
    pattern: str,
    backup_suffix: str,
    result: dict[str, t.Any],
) -> None:
    """Back up each regular file under *static_path* matching *pattern*."""
    async for match in static_path.rglob(pattern):
        if not await match.is_file():
            continue
        await _backup_single_file(match, backup_suffix, result)
async def _backup_single_file(
    file_path: AsyncPath,
    backup_suffix: str,
    result: dict[str, t.Any],
) -> None:
    """Create a backup copy of one file, recording success or failure."""
    try:
        backup = await create_backup(Path(file_path), backup_suffix)
    except Exception as e:
        result["errors"].append(f"{file_path}: {e}")
    else:
        result["backed_up"].append(str(backup))